f064c1
@@ -27,6 +27,9 @@
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hive.hcatalog.common.HCatConstants;
+import org.apache.hive.hcatalog.common.HCatUtil;
+import org.apache.hive.hcatalog.data.schema.HCatSchema;
 
 /**
  * The InputFormat to use to read data from HCatalog.
@@ -140,4 +143,31 @@
public HCatInputFormat setProperties(Properties properties) throws IOException {
     }
     return this;
   }
+
+  /**
+   * Return partitioning columns for this input, can only be called after setInput is called.
+   * @param conf the job configuration that setInput stored the serialized job information in.
+   * @return partitioning columns of the table specified by the job.
+   * @throws IOException if the stored job information cannot be deserialized from the configuration.
+   */
+  public static HCatSchema getPartitionColumns(Configuration conf) throws IOException {
+    InputJobInfo inputInfo = (InputJobInfo) HCatUtil.deserialize(
+        conf.get(HCatConstants.HCAT_KEY_JOB_INFO));
+    Preconditions.checkNotNull(inputInfo,
+        "inputJobInfo is null, setInput has not yet been called to save job into conf supplied.");
+    return inputInfo.getTableInfo().getPartitionColumns();
+  }
+
+  /**
+   * Return data columns for this input, can only be called after setInput is called.
+   * @return data columns of the table specified by the job.
+   * @throws IOException if the stored job information cannot be deserialized from the configuration.
+   */
+  public static HCatSchema getDataColumns(Configuration conf) throws IOException {
+    InputJobInfo inputInfo = (InputJobInfo) HCatUtil.deserialize(
+        conf.get(HCatConstants.HCAT_KEY_JOB_INFO));
+    Preconditions.checkNotNull(inputInfo,
+        "inputJobInfo is null, setInput has not yet been called to save job into conf supplied.");
+    return inputInfo.getTableInfo().getDataColumns();
+  }
 }
